runtime.g.stack (field)
175 uses
runtime (current package)
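Every use below reads or writes one of the two bounds of this field. For orientation, here is a minimal sketch (my code, not the runtime's) of the shape behind g.stack and the half-open [lo, hi) membership test that recurs in checkptr.go, mbitmap.go, mgc.go and signal_unix.go; the field layout mirrors the stack type declared alongside the field in runtime2.go.

    // Sketch, not the runtime source: the layout behind g.stack and the
    // membership check that most of the reads below perform.
    package main

    import "fmt"

    type stack struct {
        lo uintptr // lowest address of the stack memory
        hi uintptr // one past the highest address; sp starts near hi and grows down
    }

    // onStack mirrors checks like "gp.stack.lo <= uintptr(p) && uintptr(p) < gp.stack.hi".
    func onStack(p uintptr, s stack) bool {
        return s.lo <= p && p < s.hi
    }

    func main() {
        s := stack{lo: 0x1000, hi: 0x9000}
        fmt.Println(onStack(0x1000, s)) // true: lo is inclusive
        fmt.Println(onStack(0x9000, s)) // false: hi is exclusive
    }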
cgo_sigaction.go#L54: case sp < g.stack.lo || sp >= g.stack.hi:
cgocall.go#L241: inBound := sp > g0.stack.lo && sp <= g0.stack.hi
cgocall.go#L269: g0.stack.hi = sp + 1024
cgocall.go#L270: g0.stack.lo = sp - 32*1024
cgocall.go#L289: g0.stack.lo = bounds[0]
cgocall.go#L290: g0.stack.hi = bounds[1]
cgocall.go#L294: g0.stackguard0 = g0.stack.lo + stackGuard
cgocall.go#L309: oldStack := gp.m.g0.stack
cgocall.go#L376: gp.m.g0.stack = oldStack
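The cgocall.go lines above show two ways the g0 bounds are set on a cgo callback: exact bounds when the platform can report them (cgocall.go#L289-290), and otherwise a conservative window around the current sp (cgocall.go#L269-270). A hedged sketch of that fallback, with the constants copied from the listing and the function name my own:

    // Illustrative sketch of the fallback at cgocall.go#L269-270: when exact
    // thread stack bounds are unavailable, g0's bounds are estimated around
    // the callback's sp, with a little headroom above and 32 KiB below,
    // since the C stack grows downward. estimateG0Bounds is a made-up name.
    package main

    import "fmt"

    func estimateG0Bounds(sp uintptr) (lo, hi uintptr) {
        hi = sp + 1024
        lo = sp - 32*1024
        return lo, hi
    }

    func main() {
        lo, hi := estimateG0Bounds(0x7fff_0000)
        fmt.Printf("g0 stack ~ [%#x, %#x)\n", lo, hi)
    }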
checkptr.go#L91: if gp := getg(); gp.stack.lo <= uintptr(p) && uintptr(p) < gp.stack.hi {
debugcall.go#L38: if sp := sys.GetCallerSP(); !(getg().stack.lo < sp && sp <= getg().stack.hi) {
mbitmap.go#L1904: if gp := getg(); gp.m.curg.stack.lo <= uintptr(p) && uintptr(p) < gp.m.curg.stack.hi {
mcleanup.go#L737: racefree(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
mcleanup.go#L739: racemalloc(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
mcleanup.go#L754: racefree(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
mcleanup.go#L756: racemalloc(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
mgc.go#L1984: if gp.stack.lo <= p2 && p2 < gp.stack.hi {
mgcmark.go#L315: stackfree(gp.stack)
mgcmark.go#L316: gp.stack.lo = 0
mgcmark.go#L317: gp.stack.hi = 0
mgcmark.go#L888: scannedSize := gp.stack.hi - sp
mgcmark.go#L905: state.stack = gp.stack
mgcmark.go#L912: print("scanning async preempted goroutine ", gp.goid, " stack [", hex(gp.stack.lo), ",", hex(gp.stack.hi), ")\n")
mgcmark.go#L1005: println(" dead stkobj at", hex(gp.stack.lo+uintptr(obj.off)), "of size", obj.r.size)
os_linux.go#L174: stk := unsafe.Pointer(mp.g0.stack.hi)
panic.go#L1230: if sp != 0 && (sp < gp.stack.lo || gp.stack.hi < sp) {
panic.go#L1231: print("recover: ", hex(sp), " not in [", hex(gp.stack.lo), ", ", hex(gp.stack.hi), "]\n")
preempt.go#L176: gp.stackguard0 = gp.stack.lo + stackGuard
preempt.go#L379: if sp < gp.stack.lo || sp-gp.stack.lo < asyncPreemptStack {
proc.go#L596: print("runtime: morestack on g0, stack [", hex(g.stack.lo), " ", hex(g.stack.hi), "], sp=", hex(g.sched.sp), ", called from\n")
proc.go#L902: gcrash.stack = stackalloc(16384)
proc.go#L903: gcrash.stackguard0 = gcrash.stack.lo + 1000
proc.go#L904: gcrash.stackguard1 = gcrash.stack.lo + 1000
proc.go#L1017: mp.gsignal.stackguard1 = mp.gsignal.stack.lo + stackGuard
proc.go#L1872: osStack := gp.stack.lo == 0
proc.go#L1882: size := gp.stack.hi
proc.go#L1886: gp.stack.hi = uintptr(noescape(unsafe.Pointer(&size)))
proc.go#L1887: gp.stack.lo = gp.stack.hi - size + 1024
proc.go#L1891: gp.stackguard0 = gp.stack.lo + stackGuard
proc.go#L2017: stackfree(mp.gsignal.stack)
proc.go#L2326: stackfree(freem.g0.stack)
proc.go#L2519: gp.sched.sp = gp.stack.hi
proc.go#L2665: g0.stack.hi = 0
proc.go#L2666: g0.stack.lo = 0
proc.go#L3353: gp.stackguard0 = gp.stack.lo + stackGuard
proc.go#L4457: gcController.addScannableStack(pp, -int64(gp.stack.hi-gp.stack.lo))
proc.go#L4600: if gp.syscallsp < gp.stack.lo || gp.stack.hi < gp.syscallsp {
proc.go#L4602: print("entersyscall inconsistent sp ", hex(gp.syscallsp), " [", hex(gp.stack.lo), ",", hex(gp.stack.hi), "]\n")
proc.go#L4606: if gp.syscallbp != 0 && gp.syscallbp < gp.stack.lo || gp.stack.hi < gp.syscallbp {
proc.go#L4608: print("entersyscall inconsistent bp ", hex(gp.syscallbp), " [", hex(gp.stack.lo), ",", hex(gp.stack.hi), "]\n")
proc.go#L4741: if gp.syscallsp < gp.stack.lo || gp.stack.hi < gp.syscallsp {
proc.go#L4746: print("entersyscallblock inconsistent sp ", hex(sp1), " ", hex(sp2), " ", hex(sp3), " [", hex(gp.stack.lo), ",", hex(gp.stack.hi), "]\n")
proc.go#L4751: if gp.syscallsp < gp.stack.lo || gp.stack.hi < gp.syscallsp {
proc.go#L4753: print("entersyscallblock inconsistent sp ", hex(sp), " ", hex(gp.sched.sp), " ", hex(gp.syscallsp), " [", hex(gp.stack.lo), ",", hex(gp.stack.hi), "]\n")
proc.go#L4757: if gp.syscallbp != 0 && gp.syscallbp < gp.stack.lo || gp.stack.hi < gp.syscallbp {
proc.go#L4759: print("entersyscallblock inconsistent bp ", hex(bp), " ", hex(gp.sched.bp), " ", hex(gp.syscallbp), " [", hex(gp.stack.lo), ",", hex(gp.stack.hi), "]\n")
proc.go#L4858: gp.stackguard0 = gp.stack.lo + stackGuard
proc.go#L5061: gp.stackguard0 = gp.stack.lo + stackGuard
proc.go#L5141: newg.stack = stackalloc(uint32(stacksize))
proc.go#L5143: newg.valgrindStackID = valgrindRegisterStack(unsafe.Pointer(newg.stack.lo), unsafe.Pointer(newg.stack.hi))
proc.go#L5146: newg.stackguard0 = newg.stack.lo + stackGuard
proc.go#L5150: *(*uintptr)(unsafe.Pointer(newg.stack.lo)) = 0
proc.go#L5189: if newg.stack.hi == 0 {
proc.go#L5199: sp := newg.stack.hi - totalSize
proc.go#L5243: gcController.addScannableStack(pp, int64(newg.stack.hi-newg.stack.lo))
proc.go#L5325: stksize := gp.stack.hi - gp.stack.lo
proc.go#L5329: stackfree(gp.stack)
proc.go#L5330: gp.stack.lo = 0
proc.go#L5331: gp.stack.hi = 0
proc.go#L5347: if gp.stack.lo == 0 {
proc.go#L5385: if gp.stack.lo != 0 && gp.stack.hi-gp.stack.lo != uintptr(startingStackSize) {
proc.go#L5390: stackfree(gp.stack)
proc.go#L5391: gp.stack.lo = 0
proc.go#L5392: gp.stack.hi = 0
proc.go#L5400: if gp.stack.lo == 0 {
proc.go#L5403: gp.stack = stackalloc(startingStackSize)
proc.go#L5405: gp.valgrindStackID = valgrindRegisterStack(unsafe.Pointer(gp.stack.lo), unsafe.Pointer(gp.stack.hi))
proc.go#L5408: gp.stackguard0 = gp.stack.lo + stackGuard
proc.go#L5411: racemalloc(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
proc.go#L5414: msanmalloc(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
proc.go#L5417: asanunpoison(unsafe.Pointer(gp.stack.lo), gp.stack.hi-gp.stack.lo)
proc.go#L5431: if gp.stack.lo == 0 {
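A pattern worth calling out in the proc.go group: every (re)assignment of gp.stack is paired with resetting stackguard0 to stack.lo + stackGuard (proc.go#L1891, #L5146, #L5408, and others), the limit that the compiler-emitted function prologue compares the stack pointer against before deciding to grow the stack. A sketch of that invariant, with hypothetical type names and an illustrative guard constant:

    // Sketch of the invariant re-established throughout proc.go: whenever the
    // stack bounds change, stackguard0 is recomputed from the new lo so the
    // prologue check (roughly "if sp is below stackguard0, grow the stack")
    // works against the new stack. stackGuardBytes stands in for the
    // runtime's stackGuard constant; its value here is illustrative only.
    package main

    import "fmt"

    const stackGuardBytes = 928 // illustrative value only

    type stack struct{ lo, hi uintptr }

    type gSketch struct {
        stack       stack
        stackguard0 uintptr
    }

    func setStack(gp *gSketch, s stack) {
        gp.stack = s
        gp.stackguard0 = s.lo + stackGuardBytes // mirrors "gp.stackguard0 = gp.stack.lo + stackGuard"
    }

    func main() {
        var gp gSketch
        setStack(&gp, stack{lo: 0x1000, hi: 0x3000})
        fmt.Printf("guard at %#x\n", gp.stackguard0)
    }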
runtime2.go#L402: stack stack // offset known to runtime/cgo
signal_unix.go#L557: if sp >= mp.gsignal.stack.lo && sp < mp.gsignal.stack.hi {
signal_unix.go#L569: if sp >= mp.g0.stack.lo && sp < mp.g0.stack.hi {
signal_unix.go#L580: st := stackt{ss_size: mp.g0.stack.hi - mp.g0.stack.lo}
signal_unix.go#L581: setSignalstackSP(&st, mp.g0.stack.lo)
signal_unix.go#L657: delayedSignal := *cgo_yield != nil && mp != nil && gsignal.stack == mp.g0.stack
signal_unix.go#L1114: print("mp.gsignal stack [", hex(mp.gsignal.stack.lo), " ", hex(mp.gsignal.stack.hi), "], ")
signal_unix.go#L1115: print("mp.g0 stack [", hex(mp.g0.stack.lo), " ", hex(mp.g0.stack.hi), "], sp=", hex(sp), "\n")
signal_unix.go#L1332: signalstack(&mp.gsignal.stack)
signal_unix.go#L1420: old.stack = gp.m.gsignal.stack
signal_unix.go#L1426: gp.m.gsignal.stack.lo = stsp
signal_unix.go#L1427: gp.m.gsignal.stack.hi = stsp + st.ss_size
signal_unix.go#L1439: gp.stack = st.stack
stack.go#L903: old := gp.stack
stack.go#L968: gp.stack = new
stack.go#L1042: " sp=", hex(gp.sched.sp), " stack=[", hex(gp.stack.lo), ", ", hex(gp.stack.hi), "]\n",
stack.go#L1079: gp.stackguard0 = gp.stack.lo + stackGuard
stack.go#L1084: if gp.stack.lo == 0 {
stack.go#L1092: if stackDebug >= 1 || sp < gp.stack.lo {
stack.go#L1093: print("runtime: newstack sp=", hex(sp), " stack=[", hex(gp.stack.lo), ", ", hex(gp.stack.hi), "]\n",
stack.go#L1097: if sp < gp.stack.lo {
stack.go#L1099: print("runtime: split stack overflow: ", hex(sp), " < ", hex(gp.stack.lo), "\n")
stack.go#L1130: oldsize := gp.stack.hi - gp.stack.lo
stack.go#L1139: used := gp.stack.hi - gp.sched.sp
stack.go#L1158: print("runtime: sp=", hex(sp), " stack=[", hex(gp.stack.lo), ", ", hex(gp.stack.hi), "]\n")
stack.go#L1236: if gp.stack.lo == 0 {
stack.go#L1268: oldsize := gp.stack.hi - gp.stack.lo
stack.go#L1280: avail := gp.stack.hi - gp.stack.lo
stack.go#L1281: if used := gp.stack.hi - gp.sched.sp + stackNosplit; used >= avail/4 {
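The stack.go group leans on two derived quantities: the total size (stack.hi - stack.lo, stack.go#L1268/#L1280) and the in-use portion (stack.hi - sp, since stacks grow down from hi, stack.go#L1139/#L1281). A hedged sketch of the shrink heuristic implied by stack.go#L1280-1281, with my own names and a stand-in constant; the real shrinkstack applies further conditions not shown here:

    // Hedged sketch of the arithmetic at stack.go#L1280-1281: the stack is a
    // shrink candidate only when the in-use portion (plus nosplit headroom)
    // is below a quarter of what is available. wantShrink is a made-up name
    // and stackNosplitSketch stands in for the runtime's stackNosplit.
    package main

    import "fmt"

    type stack struct{ lo, hi uintptr }

    const stackNosplitSketch = 800 // illustrative value only

    func wantShrink(s stack, sp uintptr) bool {
        avail := s.hi - s.lo
        used := s.hi - sp + stackNosplitSketch
        return used < avail/4
    }

    func main() {
        s := stack{lo: 0x0000, hi: 0x8000}      // 32 KiB stack
        fmt.Println(wantShrink(s, s.hi-0x200))  // lightly used: shrink candidate
        fmt.Println(wantShrink(s, s.hi-0x4000)) // half used: keep as is
    }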
string.go#L174: stk := getg().stack
traceallocfree.go#L80: trace.GoroutineStackExists(gp.stack.lo, gp.stack.hi-gp.stack.lo)
traceback.go#L204: tracebackHexdump(gp.stack, &frame, 0)
traceback.go#L467: tracebackHexdump(gp.stack, frame, 0)
traceback.go#L480: tracebackHexdump(gp.stack, frame, frame.sp)
traceback.go#L565: print("\tstack=[", hex(gp.stack.lo), "-", hex(gp.stack.hi), "\n")